In [1]:
# from google.colab import drive
# drive.mount('/content/drive')
# folderPath = "/content/drive/Othercomputers/z590/Colab Notebooks/Computer Vision/Project1"
folderPath = "."
In [416]:
import os
import glob
import cv2
import re
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
import keras
from keras.models import Sequential
from keras.models import Model
from keras.layers import Dense, Dropout, Conv2D, Flatten, MaxPool2D, GlobalMaxPooling2D, BatchNormalization, InputLayer
from keras.layers.convolutional import MaxPooling2D
from keras.optimizers import RMSprop
import tensorflow as tf
import matplotlib

from sklearn.decomposition import PCA as RandomizedPCA
from sklearn.svm import SVC
from sklearn.model_selection import learning_curve, GridSearchCV
from sklearn import svm, metrics
from sklearn.preprocessing import LabelBinarizer
%matplotlib inline
seed = 7
In [66]:
# Ignore the warnings
import warnings
warnings.filterwarnings("ignore")

1.A.

In [3]:
zipfile_path = os.path.join(folderPath, "plant-seedlings-classification")
zipfile_path
Out[3]:
'.\\plant-seedlings-classification'
In [4]:
from zipfile import ZipFile
with ZipFile(zipfile_path + '.zip', 'r') as z:
  print("Zip already extracted") if os.path.exists(zipfile_path) else z.extractall(path=folderPath)
Zip already extracted

1.B.

In [5]:
train_folder_path = "plant-seedlings-classification/train"
print(train_folder_path)
full_train_path = os.path.join(folderPath , train_folder_path)
print(full_train_path)
plant-seedlings-classification/train
.\plant-seedlings-classification/train
In [6]:
full_image_folder_paths = glob.glob(os.path.join(full_train_path , "*"))
full_image_folder_paths
Out[6]:
['.\\plant-seedlings-classification/train\\Black-grass',
 '.\\plant-seedlings-classification/train\\Charlock',
 '.\\plant-seedlings-classification/train\\Cleavers',
 '.\\plant-seedlings-classification/train\\Common Chickweed',
 '.\\plant-seedlings-classification/train\\Common wheat',
 '.\\plant-seedlings-classification/train\\Fat Hen',
 '.\\plant-seedlings-classification/train\\Loose Silky-bent',
 '.\\plant-seedlings-classification/train\\Maize',
 '.\\plant-seedlings-classification/train\\Scentless Mayweed',
 '.\\plant-seedlings-classification/train\\Shepherds Purse',
 '.\\plant-seedlings-classification/train\\Small-flowered Cranesbill',
 '.\\plant-seedlings-classification/train\\Sugar beet']
In [7]:
df = pd.DataFrame(columns=('name','species','image'))
for idx, image_folder_path in enumerate(full_image_folder_paths):
    label = re.split(r'[/\\]', image_folder_path)[-1]
    image_paths = glob.glob(image_folder_path + "/*")
    for image_path in image_paths:
        imgcv2 = cv2.imread(os.path.join(image_path), cv2.COLOR_BGR2RGB)
        df.loc[len(df)] = [re.split(r'[/\\]', image_path)[-1], label, imgcv2]
df.head()
Out[7]:
name species image
0 0050f38b3.png Black-grass [[[27, 50, 80], [18, 42, 71], [36, 57, 83], [4...
1 0183fdf68.png Black-grass [[[37, 43, 55], [37, 43, 54], [40, 46, 57], [4...
2 0260cffa8.png Black-grass [[[24, 32, 45], [21, 30, 44], [22, 30, 45], [2...
3 05eedce4d.png Black-grass [[[51, 84, 108], [56, 89, 112], [54, 88, 110],...
4 075d004bc.png Black-grass [[[165, 162, 162], [165, 161, 163], [160, 157,...

1.C.

In [8]:
image_col_name = 'image'
def n_random_image_species(n):
    perm = np.random.choice(len(df), size=n)

    for item in perm:
        plt.imshow(df[image_col_name][item])
        plt.axis("off")
        plt.show()
        print("Species: ", df['species'][item])
        print("shape: ", df[image_col_name][item].shape, '\n\n')
In [9]:
n_random_image_species(5)
Species:  Fat Hen
shape:  (178, 178, 3) 


Species:  Small-flowered Cranesbill
shape:  (848, 848, 3) 


Species:  Cleavers
shape:  (149, 149, 3) 


Species:  Cleavers
shape:  (178, 178, 3) 


Species:  Common wheat
shape:  (932, 932, 3) 


2.A. & 2.B.

In [10]:
x = df.image
In [11]:
label_encoder = LabelEncoder()
y = label_encoder.fit_transform(df.species)
print(y.shape)
num_classes = len(np.unique(y))
num_classes
(4750,)
Out[11]:
12
In [12]:
y = keras.utils.to_categorical(y, num_classes=None, dtype='int')
In [13]:
y.shape
Out[13]:
(4750, 12)

2.C. & 2.D.

In [14]:
IMG_HEIGHT = 128
IMG_WIDTH = 128
In [15]:
img_data = []
for idx, item in enumerate(x):
    img = item
    print(img.size)
    img = cv2.resize(img, (IMG_HEIGHT, IMG_WIDTH), interpolation=cv2.INTER_CUBIC)
    img = np.array(img)
    img = np.float32(img)
    img /= 255 # /= 255 for normalisation
    img_data.append(img)
df['image_resized'] = img_data
115248
451632
2354988
41067
665523
3460428
189003
10824300
845883
371712
9815256
845883
5290752
375948
1030188
6023667
369603
2770563
1701027
1190700
1516563
7404123
18362028
1267500
783363
183027
61347
36300
10045146
406272
1037232
2679075
814323
1470000
657072
1232643
1407675
3460428
25947
6220800
248832
1090827
1920000
86700
54675
25947
1090827
1611867
480000
86700
3964050
2650800
3269808
662700
415152
823728
1221132
3276075
114075
1232643
1387200
1062075
95052
3276075
34347
111747
15987
73008
2958147
1825200
415152
2430000
662700
87723
554700
1090827
56307
26508
2344368
685452
1407675
3905643
1428300
2462508
6177675
3182700
369603
1825200
412923
2291628
1555200
79707
371712
21386700
68403
18922896
371712
756012
18164640
451632
43200
371712
43923
957675
1760268
23232
1132830
451632
264627
6220800
371712
1090827
1470000
38307
1326675
1555200
34992
3244800
845883
388608
685452
297675
5290752
2679075
1920000
7873200
18642960
1825200
35643
1407675
1149483
2898867
599427
484812
665523
433200
225228
694083
43200
74892
845883
57132
79707
2462508
823728
2430000
255792
1692003
765075
415152
98283
685452
107163
1625088
3551232
83667
1407675
49923
2887083
1171875
662700
723243
7873200
8376723
23763
554700
891075
2920320
2223963
395307
5290752
2851875
1872300
3874896
95052
97200
3033090
15411066
2920320
3054243
23232
1236492
480000
4385043
3830700
2898867
39675
1221132
116427
531723
28227
3776652
1470000
13060446
1642800
5156163
5290752
5968656
9098466
934092
64827
99372
3460428
91875
1232643
27648
1516563
3864675
49152
662700
26508
3689643
86700
599427
20172
18252
5156163
3712800
855468
842700
2462508
30000
1090827
580800
24843
567675
39675
16428
1568187
1374987
106032
16428
5231160
5156163
1083603
63075
480000
1568187
1642800
1683003
2096688
2430000
1470000
25392
2317923
3885132
369603
145200
4205568
2317923
415152
71148
1683003
1326675
8376723
3033090
1787952
991875
9893568
24843
20667
998787
665523
651468
76800
3133452
1002252
2007372
137388
82668
110592
702768
316875
2198208
280908
380208
3864675
1820523
401868
410700
820587
3282348
4953675
106032
318828
2952192
95052
676875
369603
2736075
463347
292032
142572
1283148
549552
87723
132300
1629507
688323
541875
123627
726192
811200
645888
55488
484812
384492
412923
102675
96123
384492
247107
3871488
82668
160083
86700
412923
735075
79707
311052
250563
2121843
572907
433200
77763
2922507
238572
111747
90828
71148
2101707
1030188
74892
759027
567675
1607472
1012683
645888
43923
3176523
2229132
71148
87723
5796300
823728
1905627
90828
67500
559872
161472
388800
97200
98283
738048
95052
290163
836352
2759043
580800
73008
79707
446988
84672
412923
2522667
299568
369603
65712
4435968
187500
92928
651468
499392
73008
676875
51483
79707
124848
3226107
2203347
112908
430923
529200
369603
1186923
53868
453963
318828
88752
86700
115248
54675
865107
72075
101568
109443
178608
451632
172800
384492
842700
417387
99372
67500
81675
250563
494508
708588
169932
81675
153228
726192
1322688
406272
98283
178608
659883
6935760
762048
659883
623808
487227
86700
470448
84672
54675
73947
108300
792588
388800
2036928
2071683
480000
4501875
137388
114075
154587
95052
301467
90828
84672
534252
410700
3470100
417387
49923
3823923
444675
46875
914112
226875
2323200
777243
435483
2354988
2805267
480000
78732
361227
4702512
60492
4092672
435483
76800
440067
426387
424128
492075
1108992
565068
311052
80688
3803628
484812
726192
375948
440067
69312
421875
599427
367500
412923
610203
798768
437772
375948
373827
1291008
357075
3213675
99372
845883
384492
7508172
61347
80688
270000
79707
428652
165675
524172
1559523
309123
57132
691200
109443
744012
69312
3226107
371712
2684748
4680003
46128
46128
519168
583443
49923
279075
852267
164268
115248
64827
887808
446988
789507
430923
496947
76800
117612
72075
106032
85683
84672
2605872
86700
103788
1087212
71148
348843
623808
1598700
1948908
1047843
180075
671187
344763
2408448
549552
458643
81675
102675
705675
169932
554700
637563
96123
629292
694083
70227
132300
426387
536787
2857728
98283
75843
1470000
2622675
117612
679728
1221132
181548
79707
76800
1559523
534252
539328
3939948
101568
180075
79707
1030188
2312652
49923
43923
1370928
1687500
596748
472827
53067
100467
470448
501843
477603
51483
141267
554700
554700
482403
86700
72075
472827
2484300
245388
1611867
3953712
64827
482403
623808
544428
76800
71148
106032
262848
2457075
699867
562467
58800
1205868
1769472
146523
397488
2056752
361227
795675
314928
264627
3232332
808083
468075
575532
236883
336675
1175628
77763
106032
849072
894348
4085667
2928432
756012
114075
2291628
330672
73008
665523
1302843
336675
49923
393132
1629507
2940300
1787952
1660608
62208
64827
974700
266412
175692
371712
47628
58800
150528
297675
73008
13068
359148
534252
77763
417387
58800
702768
344763
786432
45387
552123
290163
57132
77763
729147
107163
85683
51483
217083
575532
607500
31212
80688
62208
45387
433200
410700
511707
480000
814323
104907
31827
382347
506763
58800
178608
221952
38988
213867
971283
688323
45387
524172
395307
95052
172800
662700
132300
73947
292032
24843
419628
174243
126075
288300
270000
747003
456300
648675
1030188
494508
101568
830028
133563
884547
395307
153228
115248
76800
714432
78732
282747
74892
645888
279075
261075
477603
71148
336675
44652
133563
268203
648675
82668
1322688
53868
63075
150528
39675
475212
34992
50700
565068
282747
85683
404067
623808
340707
226875
442368
771147
277248
717363
279075
384492
97200
134832
691200
266412
1461612
293907
795675
112908
61347
845883
168507
66603
541875
129792
73947
45387
109443
711507
539328
426387
187500
446988
195075
668352
2167500
193548
108300
112908
419628
81675
59643
541875
399675
499392
626547
858675
259308
428652
492075
437772
46875
930747
367500
58800
243675
610203
412923
127308
363312
780300
177147
964467
220323
324723
107163
852267
220323
104907
189003
35643
243675
225228
36300
82668
1259712
378075
79707
426387
70227
43923
134832
226875
90828
645888
526683
95052
67500
1701027
78732
1023168
534252
868332
615627
726192
74892
371712
552123
504300
531723
286443
651468
634800
180075
47628
59643
565068
147852
108300
231852
98283
66603
73947
118803
69312
48387
92928
96123
468075
37632
250563
874800
499392
607500
75843
699867
142572
489648
168507
74892
1076403
744012
151875
720300
456300
711507
58800
588747
228528
978123
66603
71148
59643
426387
68403
324723
114075
57132
115248
103788
68403
509232
192027
417387
777243
361227
120000
531723
1190700
541875
578163
591408
549552
72075
101568
1005723
43923
426387
297675
771147
570288
108300
839523
40368
141267
2249868
388800
46128
275427
235200
108300
92928
33708
20667
49923
192027
487227
24843
897627
52272
823728
747003
41772
18252
32448
371712
36300
16875
50700
46128
404067
41772
25947
22188
56307
35643
19200
66603
16428
59643
74892
67500
30603
75843
801867
24843
40368
424128
116427
881292
68403
20667
68403
57132
78732
220323
41772
118803
36300
60492
33708
31212
39675
40368
31827
24843
1160652
632043
72075
534252
38988
65712
30000
38307
874800
944163
11907
111747
19683
39675
30603
66603
484812
28227
129792
52272
34347
46128
31827
61347
549552
559872
23232
924075
602112
588747
30000
363312
36963
338688
68403
685452
21675
38307
29403
714432
28227
181548
141267
30000
26508
84672
567675
30603
75843
17787
67500
610203
48387
65712
56307
798768
69312
68403
572907
41772
729147
32448
1160652
137388
480000
20667
116427
884547
31827
109443
536787
196608
24300
524172
588747
79707
13068
26508
54675
24300
23763
97200
60492
27075
855468
132300
60492
28227
65712
44652
19683
22188
28812
79707
44652
74892
583443
184512
539328
27075
783363
180075
53067
66603
699867
93987
122412
16428
154587
56307
22188
43923
830028
65712
487227
80688
268203
621075
72075
56307
29403
10800
21168
31212
87723
30000
27648
830028
559872
187500
187500
34347
762048
41772
109443
804972
768108
32448
559872
795675
74892
22707
401868
226875
475212
111747
645888
24300
88752
25947
83667
50700
104907
104907
42483
31212
43200
783363
33708
449307
732108
36300
79707
72075
23232
49923
37632
36300
45387
15987
792588
30000
84672
9075
24300
87723
20667
34347
386643
444675
65712
53067
34992
572907
1062075
753003
70227
95052
43923
43923
659883
93987
71148
39675
27648
795675
34347
861888
266412
861888
31827
21675
699867
49923
33075
72075
20667
90828
682587
207507
477603
567675
29403
52272
33708
417387
501843
70227
509232
33075
8748
69312
643107
643107
282747
299568
41772
100467
117612
35643
57963
104907
100467
24843
995328
61347
25947
30603
24843
82668
11532
28812
87723
22707
85683
72075
24300
81675
81675
37632
42483
71148
30000
100467
17328
124848
34992
25392
910803
102675
309123
102675
1160652
57963
37632
157323
38988
63075
86700
23232
18723
70227
58800
231852
39675
53067
645888
506763
43923
88752
428652
25947
112908
168507
702768
25392
67500
46128
618348
1202067
83667
261075
28227
357075
1134675
596748
28812
559872
56307
717363
654267
40368
102675
86700
1153200
39675
451632
21675
88752
662700
648675
549552
634800
44652
23232
73947
504300
676875
36963
261075
12675
13068
524172
34347
31827
634800
52272
28812
855468
35643
21168
14283
33075
252300
509232
477603
334668
80688
128547
768108
15987
28812
1098075
85683
14700
39675
41067
623808
49152
492075
39675
73008
19683
24300
676875
41067
390963
81675
80688
665523
97200
45387
73947
26508
29403
117612
463347
20667
504300
671187
38988
32448
18723
21675
231852
484812
102675
87723
24843
29403
43923
42483
668352
70227
212268
426387
68403
964467
79707
688323
68403
34347
711507
19200
167088
521667
472827
180075
52272
33708
59643
261075
23232
35643
934092
801867
109443
24300
406272
332667
795675
1002252
43923
957675
961068
1546572
23763
435483
446988
89787
648675
59643
80688
73008
70227
109443
1283148
172800
69312
74892
63948
20667
44652
88752
34992
887808
487227
24300
78732
336675
87723
64827
19683
45387
80688
26508
17787
58800
106032
930747
814323
36300
97200
82668
22188
29403
53868
801867
46128
511707
235200
426387
90828
839523
28227
1005723
39675
924075
10092
588747
95052
397488
44652
789507
62208
849072
75843
33708
1030188
41772
81675
12675
110592
23232
53868
13467
36300
91875
57963
588747
591408
46128
143883
53868
25392
28227
24300
37632
103788
610203
25392
53868
602112
14283
30603
480000
90828
66603
814323
15987
359148
833187
49152
16875
149187
31212
83667
44652
1016172
964467
80688
82668
957675
330672
726192
29403
15987
937443
59643
75843
38307
23232
129792
40368
81675
57132
60492
20172
19200
106032
59643
424128
79707
110592
88752
541875
24300
529200
395307
107163
373827
53868
82668
583443
668352
747003
218700
1244208
20172
1638363
924075
23232
637563
38307
804972
615627
29403
1370928
1130988
842700
45387
738048
1403568
1370928
665523
112908
20667
2916588
1391283
1168128
1334667
2413827
2392347
34347
668352
21675
659883
947532
4823472
39675
390963
1069227
1080000
16875
2747547
1342683
1638363
22707
58800
3803628
1860468
2799468
1338672
106032
1465803
511707
1016172
39675
1436592
22188
964467
53067
623808
111747
475212
567675
868332
1478412
1825200
73008
2106732
92928
470448
104907
64827
33708
1175628
83667
55488
1815852
72075
567675
84672
98283
2934363
2198208
1428300
1354752
637563
1198272
1002252
43200
1123632
792588
44652
971283
738048
1023168
375948
823728
1714608
98283
57132
1383123
1512300
732108
43200
991875
1920000
1002252
1160652
2605872
2451648
1012683
1848675
2600283
1002252
2126892
380208
1470000
31827
607500
29403
3722988
1255827
578163
62208
388800
1002252
2811072
32448
90828
2142075
81675
833187
17787
3000000
45387
38988
3276075
46128
1620675
1853388
1403568
1665075
34992
62208
1023168
86700
557283
18723
41772
33708
623808
621075
1037232
1160652
107163
29403
3402675
7803
1221132
811200
57132
46128
615627
1171875
10443
621075
82668
1271403
874800
17328
27075
92928
63948
13068
15123
15987
42483
46875
39675
629292
74892
2690427
10800
102675
28227
11163
10800
1083603
567675
1581228
35643
11907
13068
1101708
92928
33075
944163
73008
83667
694083
15987
1589952
116427
583443
927408
2484300
12288
756012
1424163
15987
494508
82668
1062075
823728
1963443
3000000
55488
588747
17787
1123632
6151872
482403
87723
12288
820587
58800
56307
231852
241968
93987
79707
514188
674028
71148
591408
792588
428652
43200
123627
408483
34347
36300
77763
28227
433200
771147
137388
1051392
78732
292032
3176523
195075
92928
744012
98283
75843
410700
133563
83667
451632
86700
96123
316875
164268
221952
223587
87723
30603
43923
640332
1190700
63948
101568
384492
386643
117612
34992
121203
167088
43923
15123
184512
49152
552123
110592
149187
1963443
96123
303372
30000
85683
111747
365403
2600283
153228
397488
501843
1741932
43923
145200
85683
177147
53868
72075
43923
100467
143883
705675
157323
386643
217083
741027
468075
146523
81675
218700
106032
897627
262848
109443
62208
34992
92928
67500
2467947
254043
102675
390963
264627
61347
632043
424128
4392300
64827
85683
305283
602112
107163
338688
1465803
280908
116427
175692
55488
1232643
151875
192027
765075
31212
192027
73947
110592
328683
3282348
12675
311052
268203
225228
85683
1862832
9747
210675
53868
57963
195075
54675
126075
165675
120000
463347
162867
158700
45387
153228
82668
108300
2260272
22707
1275312
1625088
95052
410700
43923
823728
792588
32448
37632
93987
108300
80688
440067
181548
228528
57963
134832
76800
1105347
449307
164268
65712
397488
70227
282747
225228
259308
380208
38988
122412
299568
95052
112908
262848
106032
671187
102675
121203
55488
56307
50700
158700
158700
117612
75843
165675
1482627
615627
230187
36300
534252
108300
626547
54675
460992
122412
49923
419628
4312803
273612
67500
1457427
207507
131043
133563
691200
101568
346800
369603
88752
49923
97200
73947
178608
155952
207507
717363
86700
332667
54675
160083
1116300
924075
348843
63075
43923
472827
46128
266412
9408
1508043
262848
157323
181548
76800
1495308
1379052
991875
365403
38307
117612
557283
451632
160083
430923
9747
729147
53067
21675
106032
610203
96123
196608
549552
55488
36963
13467
78732
57132
126075
223587
3790128
1594323
95052
544428
49152
69312
63075
811200
73008
46875
131043
1403568
243675
74892
367500
41772
215472
688323
153228
632043
578163
336675
596748
34347
250563
2061723
241968
160083
359148
404067
221952
73008
679728
40368
2208492
55488
900912
57132
284592
390963
81675
154587
201243
578163
9075
386643
233523
357075
415152
75843
228528
2667747
221952
43200
38307
230187
1982907
73947
71148
95052
318828
196608
25947
61347
83667
397488
1440747
193548
69312
82668
103788
217083
201243
612912
79707
261075
408483
52272
19200
1576875
621075
682587
54675
334668
4568268
90828
143883
295788
10092
342732
44652
252300
202800
41772
261075
526683
27075
254043
80688
116427
1168128
233523
717363
314928
88752
129792
70227
70227
1687500
138675
221952
134832
225228
674028
519168
424128
100467
266412
117612
60492
290163
250563
32448
406272
340707
34347
69312
128547
114075
101568
303372
36963
73947
2594700
578163
93987
62208
463347
95052
97200
150528
141267
178608
51483
981552
257547
118803
41772
484812
309123
61347
2793675
85683
62208
98283
1815852
13467
80688
64827
330672
1002252
13068
640332
937443
350892
109443
338688
31212
115248
904203
826875
2386992
120000
71148
1012683
255792
134832
546987
4861587
212268
309123
3276075
41772
277248
361227
930747
2413827
2354988
53067
46128
52272
189003
199692
795675
116427
142572
100467
87723
160083
118803
322752
91875
4269747
2187948
49152
836352
836352
477603
563436
12362700
4414107
1062075
22188
1236492
44652
106032
5729772
2707500
46875
643107
86700
460992
43200
1603083
3048192
77763
937443
45387
1744158
1274229
5964300
59643
33075
46128
504300
519168
2187948
43200
118803
2056164
1318707
39675
39675
4219788
4184283
1186923
1774083
526683
41067
52272
437772
111747
4269747
3294912
34992
1274229
1338672
1044300
63948
39675
671187
1016172
34347
56307
20667
116427
34992
79707
1428300
472827
79707
664200
36300
1330668
49923
52272
544428
1094448
61347
78732
395307
92928
26508
591408
5896812
492075
12362700
25947
146523
967872
21675
90828
524172
47628
42483
839496
940800
1362828
52272
924075
74892
399675
45387
72075
27075
84672
73008
671187
1236492
771147
55488
59643
836352
671187
80688
77763
98283
437772
92928
487227
76800
112908
419628
55488
3294912
615627
73947
5261583
34347
58800
1194483
195075
720300
87723
56307
1274229
53067
516675
72075
108300
121203
80688
526683
53067
1274229
967872
33075
102675
3207468
777243
66603
732108
48387
489648
58800
747003
68403
444675
50700
1533675
109443
526683
16875
160083
28812
580800
23232
44652
49923
63948
1194483
38988
15123
51483
739140
52272
56307
1848675
47628
390963
3383532
1603083
103788
46875
57963
5483712
80688
100467
75843
30603
11319918
39675
1094448
82668
80688
1603083
468075
472827
292032
940800
52272
24843
1905627
1105347
133563
49152
1491075
103788
73947
732108
524172
38307
1362828
193548
395307
783363
4184283
591408
27075
215472
25392
61347
637563
4762800
26508
104907
406272
634800
75843
72075
637563
82668
1696512
739140
967872
116427
15123
406272
21168
92928
38988
1905627
3623403
5483712
72075
35643
54675
33708
489648
36963
1656147
2017200
142572
145200
79707
60492
84672
47628
114075
487227
104907
63948
56307
146523
307200
3383532
4414107
477603
747003
70227
80688
58800
53067
539328
1820523
22707
765075
55488
29403
1318707
266412
29403
21675
3060300
42483
4414107
404067
127308
292032
489648
31212
3623403
991875
108300
1255827
1274229
1696512
51483
783363
64827
1687500
116427
1145772
78732
254043
46875
37874892
437772
30000
978123
3983418
1274229
23763
74892
106032
15123
48387
1482627
52272
36300
1274229
103788
637563
2286387
489648
79707
37632
70227
524172
30000
45387
38988
34347
81675
77763
723243
58800
1436592
100467
210675
40368
35643
3983418
96123
46128
104907
88752
97200
49152
65712
563436
142572
1274229
22188
293907
747003
3983418
83667
27075
50700
109443
588747
1037232
95052
777243
27075
51483
88752
266412
836352
82668
6203532
390963
1774083
49152
51483
73008
151875
1403568
43200
878043
1236492
33708
1415907
112908
71148
46875
83667
71148
1255827
97200
71148
388800
25947
27075
322752
978123
738048
54675
3207468
79707
103788
174243
61347
41390661
39675
42483
53067
671187
1274229
72075
73947
732108
78732
44652
21168
406272
213867
44652
15123
1499547
180075
36300
878043
180075
33075
248832
76800
205932
318828
526683
594075
56307
48387
504300
1354752
84672
124848
51483
914100
46128
588747
702768
195075
112908
865107
102675
648648
42483
676875
20667
39675
146523
50700
440067
27648
468075
90828
104907
594075
814323
223587
2349675
3060300
1016172
37632
13893552
53067
1428300
40368
11319918
63948
25392
51483
295788
38988
924075
395307
1204896
92928
29371923
674028
98283
84672
73008
114075
732108
1255827
33708
406272
104907
92928
51483
56307
1486848
75843
71148
56307
57963
64827
3060300
1687500
5313711
460992
132300
496947
90828
131043
4501875
78732
115248
96123
1978032
82668
1499547
70227
51483
87723
168507
504300
44652
71148
55488
395307
37632
2198208
3232332
80688
468075
75843
720300
6203532
51483
103788
1274229
56307
95052
472827
90828
22707
390963
4248300
739140
720300
44652
1145772
1428300
290163
8286732
1428300
39675
984987
73947
155952
23232
1342683
3207468
2111763
446988
5729772
1499547
25947
46875
978123
33708
878043
602112
43923
114075
68403
72075
73947
607500
47628
43923
108300
468075
382347
71148
1274229
189003
38307
591408
1353984
51483
84672
13893552
10079667
1274229
81675
34992
16875
3383532
967872
544428
25392
940800
637563
25947
6642432
28227
5729772
626547
46128
34347
103788
1062075
390963
11319918
89787
28812
56307
475212
50700
67500
46128
3294912
57963
2805267
165675
40368
88752
406272
44652
95052
1274229
460992
34347
2198208
2805267
27648
58800
72075
1318707
63948
28227
26508
43923
44652
46128
881292
63075
4241163
59643
85683
292032
39675
1130988
59643
534252
27075
32448
1198272
777243
83667
2198208
90828
4291248
16428
11532
20172
6615675
13068
33075
18723
3409068
1002252
2392347
95052
2408448
1090827
836352
1298892
1633932
18252
8748
14700
48387
72075
45387
43200
1411788
521667
1362828
20667
67500
28227
49923
578163
694083
17787
596748
38988
412923
34347
44652
23232
546987
25392
2177712
38988
3096768
2091675
26508
1287075
2317923
52272
4403196
15552
544428
38307
4078668
3636603
63075
99372
88752
894348
855468
64827
15552
30000
40368
24300
4078668
3294912
6177675
67500
21168
18252
4597932
1127307
1294947
10092
1978032
87723
954288
24300
10443
11532
2916588
24843
5622483
99372
68403
599427
21675
1232643
404067
4762800
514188
3383532
1248075
399675
2213643
50700
1877043
15987
6333627
1287075
484812
1302843
23763
15123
1585587
117612
894348
1963443
15987
1811187
3518667
1692003
11907
2307387
2086668
23763
38988
3121200
1495308
1436592
41772
100467
27075
516675
66603
419628
20667
4590507
7282092
83667
615627
67500
1240347
792588
2857728
1848675
699867
2397708
109443
9408
618348
27075
15123
5015547
19683
8629248
940800
711507
13872
1411788
86700
6655830
2667747
2046828
14283
71148
442368
10092
5298723
30603
4291248
4022892
1054947
541875
16428
13068
618348
534252
1291008
988428
444675
6070518
1156923
63075
7565232
399675
891075
95052
465708
21168
49152
60492
56307
23763
4755243
1054947
2041875
820587
435483
2753292
526683
720300
839523
56307
671187
397488
43923
1194483
82668
506763
2249868
1470000
580800
988428
1474203
10830000
99372
22188
20172
596748
602112
2622675
412923
64827
33708
12675
440067
89787
42483
12675
38307
49152
22188
98283
10800
15123
17787
33075
19200
24843
711507
774192
46875
688323
39675
2051787
855468
43923
421875
25392
15552
11532
155952
21675
424128
68403
7803
15987
1669548
424128
71148
7203
20667
40368
15552
36300
13068
16875
1105347
395307
1047843
85683
13872
34992
47628
86700
73008
57963
30603
16875
36963
29403
591408
66603
8427
21675
1432443
11163
1334667
43923
10443
33708
927408
16875
118803
39675
41772
37632
33708
852267
31827
31212
1516563
25392
19200
56307
49923
833187
25392
54675
56307
27075
3036108
41067
887808
39675
19683
1051392
28227
13872
440067
696972
11907
8427
11532
80688
950907
104907
971283
31827
35643
735075
18723
410700
30603
89787
15123
501843
70227
11163
22707
36963
1221132
31212
60492
18723
30603
30603
33708
408483
22188
34992
1403568
16875
549552
20172
18252
31827
120000
102675
1350723
74892
63948
85683
1105347
22188
49152
762048
112908
7803
26508
18252
43200
13068
63948
494508
15123
421875
23232
32448
25392
63075
489648
855468
2111763
46128
36300
25947
9747
521667
786432
57132
8748
107163
38988
30000
60492
41772
15123
23763
24300
57963
12675
456300
64827
1594323
30603
59643
91875
12288
54675
41067
765075
1012683
36963
72075
1769472
34347
83667
1474203
63075
33708
56307
1030188
792588
904203
390963
55488
10092
29403
648675
18723
15123
49923
845883
17787
40368
25392
34992
22188
1179387
384492
46875
14700
34992
9747
20667
16428
51483
56307
1033707
25947
623808
27075
63948
1279227
29403
14700
991875
47628
34347
99372
10092
80688
55488
12675
26508
21168
1482627
768108
7803
27648
54675
1279227
19200
48387
27648
59643
659883
1119963
43923
369603
15123
92928
62208
41772
15987
16428
8112
33075
12288
78732
777243
1330668
16428
21675
705675
23232
10092
868332
9075
43923
78732
63075
14283
58800
25947
35643
116427
30000
35643
1255827
16428
21675
48387
13872
16428
30603
10443
41067
868332
15987
43200
1512300
21168
44652
998787
277248
9747
39675
35643
48387
63075
73947
562467
1033707
21675
88752
1047843
28812
33708
38307
17787
75843
18723
88752
67500
29403
20667
38988
17787
524172
23763
13872
468075
57963
814323
99372
69312
1314732
27075
1638363
1424163
881292
25947
14700
39675
26508
19683
708588
63075
45387
35643
75843
514188
1179387
91875
61347
55488
106032
11163
21675
81675
521667
865107
42483
1310763
19683
85683
1656147
25392
442368
19200
20172
612912
35643
45387
744012
56307
18723
26508
27648
1774083
1087212
1905627
920748
995328
21675
7203
4516587
72075
29403
13467
70227
71148
13872
31827
24300
41772
28227
871563
81675
720300
70227
15123
45387
41772
64827
340707
33075
13467
57963
833187
37632
17787
62208
68403
64827
15987
14700
53067
1123632
53067
30000
38988
1465803
34347
852267
33708
24843
30603
1134675
25392
19683
13068
38307
21675
20172
66603
13872
17328
971283
789507
16428
20667
68403
108300
406272
13872
99372
106032
57963
107163
562467
32448
13068
75843
65712
14700
55488
924075
47628
90828
71148
27075
22707
9747
30000
723243
49923
1714608
15987
1138368
41772
1968300
11907
33075
33075
924075
49923
19200
20667
22188
2017200
102675
1620675
42483
15552
40368
32448
22707
103788
49152
16428
826875
68403
12288
56307
11532
46875
1470000
1186923
1033707
1080000
565068
41067
28812
11907
36300
14700
401868
107163
9075
1105347
65712
604803
84672
480000
428652
399675
58800
18252
1403568
47628
284592
23763
33075
1395372
375948
11907
30000
30603
57963
881292
58800
16428
25392
428652
514188
162867
433200
68403
393132
31827
193548
141267
477603
85683
13068
19200
489648
48387
118803
465708
270000
3232332
37632
36300
21675
38307
419628
193548
671187
578163
51483
27648
444675
116427
32448
3857868
44652
1138368
37632
28227
4793088
390963
3042147
79707
39675
209088
39675
115248
365403
3518667
1607472
46875
90828
91875
85683
544428
39675
750000
26508
1769472
2566875
56307
36300
117612
122412
84672
1944075
34992
2392347
583443
85683
58800
28227
726192
43923
386643
64827
17328
151875
580800
1267500
501843
76800
84672
826875
70227
30603
63075
2056752
489648
295788
591408
67500
153228
504300
61347
410700
39675
215472
47628
89787
87723
456300
47628
67500
750000
16875
290163
57963
1358787
30603
753003
16428
426387
629292
27648
34347
1065648
506763
726192
482403
73008
89787
2673408
39675
13872
50700
19683
107163
60492
75843
35643
73008
92928
71148
31212
58800
11907
32448
1069227
53868
162867
1186923
30000
245388
59643
679728
17328
67500
41067
50700
1811187
1723692
22707
128547
472827
23232
28227
49923
33075
554700
21675
567675
98283
46875
57963
46875
109443
41067
268203
41772
145200
1669548
16428
69312
54675
1314732
49152
401868
54675
38988
36300
524172
61347
440067
82668
72075
51483
1537968
84672
5203467
4597932
53067
397488
36963
1090827
544428
17328
204363
702768
36963
679728
720300
212268
415152
42483
89787
66603
28812
404067
73947
887808
150528
42483
38988
708588
53067
34347
322752
66603
367500
221952
1751088
58800
90828
460992
54675
254043
142572
11532
373827
65712
106032
1175628
570288
519168
41067
1087212
83667
305283
184512
79707
744012
741027
453963
150528
1611867
193548
401868
280908
53067
70227
654267
129792
845883
69312
1576875
1044300
382347
168507
586092
126075
85683
195075
172800
81675
1478412
1444908
783363
161472
221952
100467
759027
262848
1525107
433200
1370928
442368
84672
410700
311052
162867
934092
404067
57132
51483
711507
19200
688323
115248
1248075
668352
567675
842700
132300
346800
205932
231852
122412
43200
1598700
96123
243675
615627
162867
1444908
50700
907500
726192
1183152
2041875
67500
1275312
165675
674028
46128
63075
1732800
477603
67500
58800
167088
312987
280908
238572
45387
262848
153228
519168
69312
2759043
63075
40368
2157312
1251948
415152
77763
50700
73947
1829883
1399467
128547
75843
67500
632043
136107
27075
89787
2234307
134832
57132
133563
41067
1346700
303372
1867563
104907
506763
468075
57132
1598700
73008
1171875
468075
121203
388800
72075
89787
1160652
56307
56307
910803
171363
399675
131043
708588
63948
82668
79707
192027
154587
174243
136107
81675
410700
529200
1127307
116427
235200
154587
78732
750000
998787
60492
855468
61347
654267
480000
167088
1568187
145200
132300
181548
2229132
153228
634800
210675
61347
81675
69312
96123
73008
887808
150528
395307
64827
131043
1108992
295788
1461612
382347
509232
257547
73947
54675
111747
38307
168507
861888
914112
562467
142572
150528
295788
1123632
73947
567675
228528
117612
944163
44652
871563
430923
330672
107163
52272
521667
355008
444675
97200
1194483
570288
453963
80688
292032
1546572
68403
355008
118803
460992
735075
424128
705675
167088
314928
275427
1764867
180075
643107
167088
477603
198147
131043
991875
408483
744012
131043
1651692
477603
552123
117612
591408
780300
1997568
78732
109443
334668
254043
648675
57963
626547
40368
142572
305283
1164387
88752
103788
465708
1023168
68403
717363
80688
247107
262848
557283
62208
38307
65712
118803
72075
26508
591408
399675
1030188
131043
417387
322752
141267
342732
995328
1267500
231852
1358787
549552
153228
171363
85683
207507
288300
1145772
720300
645888
61347
81675
1279227
78732
109443
153228
1116300
1213488
694083
426387
221952
59643
53868
1002252
338688
131043
1687500
30000
69312
78732
1647243
1255827
1030188
529200
1594323
220323
1302843
424128
164268
665523
539328
67500
1108992
90828
1062075
147852
183027
484812
39675
240267
89787
81675
565068
480000
36963
1224963
914112
151875
419628
67500
226875
63075
223587
117612
426387
565068
42483
424128
73947
73947
250563
71148
1330668
99372
1678512
3036108
352947
729147
121203
386643
174243
69312
494508
46875
187500
286443
100467
220323
524172
225228
175692
266412
384492
789507
74892
519168
415152
52272
406272
95052
607500
103788
70227
629292
59643
93987
87723
131043
1516563
254043
440067
57963
35643
1383123
255792
196608
475212
171363
133563
184512
1205868
567675
282747
168507
184512
1130988
444675
60492
1198272
729147
157323
189003
30000
69312
201243
160083
83667
86700
1829883
1291008
86700
38988
2142075
82668
56307
83667
47628
1171875
1915203
241968
662700
480000
648675
477603
255792
1391283
312987
75843
186003
192027
169932
68403
1271403
42483
78732
180075
1607472
615627
808083
567675
1973163
292032
629292
437772
73008
143883
845883
68403
139968
218700
282747
67500
205932
1374987
284592
1881792
218700
68403
52272
275427
46128
482403
61347
449307
1714608
780300
694083
940800
446988
77763
21168
699867
3251043
3139587
1537968
688323
426387
468075
4356075
1728243
275427
410700
604803
3307500
971283
783363
1603083
4620243
2430000
894348
7203
5322672
665523
241968
914112
676875
6976875
623808
1318707
696972
3042147
264627
1683003
71148
964467
2928432
894348
70227
1175628
2392347
1087212
109443
1019667
4370547
1259712
8823675
759027
264627
1123632
290163
762048
78732
138675
1183152
998787
3570843
891075
657072
1366875
1016172
282747
1403568
121203
1153200
386643
645888
947532
546987
348843
81675
449307
738048
1062075
1054947
1741932
38988
1291008
648675
3704778
679728
31827
90828
213867
914112
1123632
4523952
2566875
694083
910803
85683
1625088
7170348
836352
4284075
480000
526683
1542267
2457075
344763
998787
2137008
1186923
6169068
1005723
659883
2265483
714432
408483
2539200
3722988
72075
322752
2493357
1065648
519168
257547
155952
477603
96123
440067
1248075
3551232
1194483
904203
1030188
971283
404067
114075
2511675
1202067
277248
2096688
245388
245388
1555200
1457427
1009200
419628
1760268
115248
1555200
1072812
1275312
674028
1555200
49923
85683
1559523
4612800
1127307
567675
230187
1550883
602112
65712
2131947
1087212
820587
2555787
792588
1205868
4443267
2017200
629292
22188
460992
726192
1769472
1012683
1482627
1801875
1205868
1119963
1537968
1306800
688323
72075
2808189
1387200
3377163
80688
27648
4984563
1123632
7766643
280908
404067
6255408
1224963
30603
3219888
100467
988428
524172
1240347
12288
477603
5508075
759027
930747
2851875
39675
390963
1145772
4212675
4298427
1660608
1651692
84672
484812
93987
1202067
417387
1069227
723243
615627
2555787
529200
3139587
100467
536787
99372
643107
4377792
984987
332667
41067
954288
3656448
5998188
10800
662700
662700
7142547
2142075
69312
3722988
2041875
591408
588747
995328
798768
81675
823728
575532
891075
789507
1198272
4298427
3531675
24300
2776332
116427
80688
726192
63948
3158028
98283
275427
1037232
23763
2673408
4687500
1350723
1820523
696972
1271403
243675
72075
20172
1175628
28812
67500
808083
2583552
59643
53868
1362828
735075
295788
19200
314928
90828
1198272
41772
6220800
5713200
509232
676875
729147
83667
496947
836352
1019667
808083
6160467
2307387
2764800
100467
2349675
657072
1263603
1175628
1101708
129792
499392
1051392
1271403
1696512
2840187
729147
1461612
814323
1244208
2736075
1774083
1997568
85683
861888
5985468
67500
60492
23763
1395372
845883
412923
5314683
1168128
1051392
714432
817452
79707
1585587
1134675
852267
1797228
657072
612912
694083
747003
868332
280908
1051392
596748
1905627
2086668
5844960
3219888
4613820
50700
917427
845883
1665075
1072812
1314732
3396288
3024048
559872
3276075
213867
1156923
2892972
744012
961068
1040763
209088
58800
516675
1411788
2589123
2440812
4961388
348843
1087212
900912
99372
557283
3030075
685452
In [16]:
img_data[0].shape
Out[16]:
(128, 128, 3)
In [17]:
x = np.expand_dims(img_data, axis=4)
x.shape
Out[17]:
(4750, 128, 128, 3, 1)
In [18]:
image_col_name = 'image_resized'
n_random_image_species(5)
Species:  Sugar beet
shape:  (128, 128, 3) 


Species:  Cleavers
shape:  (128, 128, 3) 


Species:  Charlock
shape:  (128, 128, 3) 


Species:  Sugar beet
shape:  (128, 128, 3) 


Species:  Maize
shape:  (128, 128, 3) 


All images are of unified shape and normalized now

we are keeping data in x and y variable for training

3.A.

Splitting data into training and testing set

In [19]:
X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=50)

Creating validation set

In [20]:
random_seed = 2
X_test, X_val, y_test, y_val = train_test_split(X_test, y_test, test_size = 0.5, random_state=random_seed)
In [21]:
print("Shapes of Train sets are: ", X_train.shape)
print("Shapes of Test sets are: ", X_test.shape)
print("Shapes of Validation sets are: ", X_val.shape)
Shapes of Train sets are:  (3325, 128, 128, 3, 1)
Shapes of Test sets are:  (712, 128, 128, 3, 1)
Shapes of Validation sets are:  (713, 128, 128, 3, 1)

3.B.

Creating CNN architecture for model

In [22]:
def cnn_model(height, width, num_channels, num_classes, loss='categorical_crossentropy', metrics=['accuracy']):
  batch_size = None

  model = Sequential()

  model.add(Conv2D(filters = 32, kernel_size = (5,5),padding = 'Same', 
                  activation ='relu', batch_input_shape = (batch_size, height, width, num_channels)))


  model.add(Conv2D(filters = 32, kernel_size = (5,5),padding = 'Same', 
                  activation ='relu'))
  model.add(MaxPool2D(pool_size=(2,2)))
  model.add(Dropout(0.2))


  model.add(Conv2D(filters = 64, kernel_size = (3,3),padding = 'Same', 
                  activation ='relu'))
  model.add(Conv2D(filters = 64, kernel_size = (3,3),padding = 'same', 
                  activation ='relu'))
  model.add(MaxPool2D(pool_size=(2,2), strides=(2,2)))
  model.add(Dropout(0.3))

  model.add(Conv2D(filters = 128, kernel_size = (3,3),padding = 'Same', 
                  activation ='relu'))
  model.add(Conv2D(filters = 128, kernel_size = (3,3),padding = 'Same', 
                  activation ='relu'))
  model.add(MaxPool2D(pool_size=(2,2), strides=(2,2)))
  model.add(Dropout(0.4))



  model.add(GlobalMaxPooling2D())
  model.add(Dense(128, activation = "relu"))
  model.add(Dropout(0.5))
  model.add(Dense(num_classes, activation = "softmax"))

  optimizer = RMSprop(lr=0.001, rho=0.9, epsilon=1e-08, decay=0.0)
  model.compile(optimizer = optimizer, loss = loss, metrics = metrics)
  model.summary()
  return model
In [23]:
cnn = cnn_model(IMG_HEIGHT, IMG_WIDTH, 3, num_classes)
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d (Conv2D)             (None, 128, 128, 32)      2432      
                                                                 
 conv2d_1 (Conv2D)           (None, 128, 128, 32)      25632     
                                                                 
 max_pooling2d (MaxPooling2D  (None, 64, 64, 32)       0         
 )                                                               
                                                                 
 dropout (Dropout)           (None, 64, 64, 32)        0         
                                                                 
 conv2d_2 (Conv2D)           (None, 64, 64, 64)        18496     
                                                                 
 conv2d_3 (Conv2D)           (None, 64, 64, 64)        36928     
                                                                 
 max_pooling2d_1 (MaxPooling  (None, 32, 32, 64)       0         
 2D)                                                             
                                                                 
 dropout_1 (Dropout)         (None, 32, 32, 64)        0         
                                                                 
 conv2d_4 (Conv2D)           (None, 32, 32, 128)       73856     
                                                                 
 conv2d_5 (Conv2D)           (None, 32, 32, 128)       147584    
                                                                 
 max_pooling2d_2 (MaxPooling  (None, 16, 16, 128)      0         
 2D)                                                             
                                                                 
 dropout_2 (Dropout)         (None, 16, 16, 128)       0         
                                                                 
 global_max_pooling2d (Globa  (None, 128)              0         
 lMaxPooling2D)                                                  
                                                                 
 dense (Dense)               (None, 128)               16512     
                                                                 
 dropout_3 (Dropout)         (None, 128)               0         
                                                                 
 dense_1 (Dense)             (None, 12)                1548      
                                                                 
=================================================================
Total params: 322,988
Trainable params: 322,988
Non-trainable params: 0
_________________________________________________________________
c:\ProgramData\Anaconda3\lib\site-packages\keras\optimizers\optimizer_v2\rmsprop.py:140: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.
  super().__init__(name, **kwargs)

3.C.

In [24]:
num_classes
Out[24]:
12
In [25]:
batch_size = 38
history = cnn.fit(X_train, 
                  y_train, 
                  epochs = 50, 
                  validation_data = (X_val, y_val),
                  batch_size = batch_size)
Epoch 1/50
88/88 [==============================] - 186s 2s/step - loss: 2.4595 - accuracy: 0.1254 - val_loss: 2.4461 - val_accuracy: 0.1346
Epoch 2/50
88/88 [==============================] - 169s 2s/step - loss: 2.4186 - accuracy: 0.1525 - val_loss: 2.3798 - val_accuracy: 0.2539
Epoch 3/50
88/88 [==============================] - 151s 2s/step - loss: 2.2455 - accuracy: 0.2472 - val_loss: 2.1651 - val_accuracy: 0.2707
Epoch 4/50
88/88 [==============================] - 141s 2s/step - loss: 2.1037 - accuracy: 0.2659 - val_loss: 2.1237 - val_accuracy: 0.2903
Epoch 5/50
88/88 [==============================] - 135s 2s/step - loss: 2.0708 - accuracy: 0.2863 - val_loss: 1.9445 - val_accuracy: 0.3408
Epoch 6/50
88/88 [==============================] - 133s 2s/step - loss: 1.9148 - accuracy: 0.3311 - val_loss: 1.8102 - val_accuracy: 0.4432
Epoch 7/50
88/88 [==============================] - 134s 2s/step - loss: 1.7714 - accuracy: 0.4003 - val_loss: 1.8288 - val_accuracy: 0.3576
Epoch 8/50
88/88 [==============================] - 134s 2s/step - loss: 1.6075 - accuracy: 0.4544 - val_loss: 1.4745 - val_accuracy: 0.5947
Epoch 9/50
88/88 [==============================] - 138s 2s/step - loss: 1.4879 - accuracy: 0.4905 - val_loss: 1.3888 - val_accuracy: 0.5330
Epoch 10/50
88/88 [==============================] - 144s 2s/step - loss: 1.3814 - accuracy: 0.5296 - val_loss: 1.2663 - val_accuracy: 0.6227
Epoch 11/50
88/88 [==============================] - 149s 2s/step - loss: 1.3244 - accuracy: 0.5468 - val_loss: 1.2551 - val_accuracy: 0.6381
Epoch 12/50
88/88 [==============================] - 143s 2s/step - loss: 1.2481 - accuracy: 0.5738 - val_loss: 1.3509 - val_accuracy: 0.5596
Epoch 13/50
88/88 [==============================] - 136s 2s/step - loss: 1.1768 - accuracy: 0.5970 - val_loss: 1.3663 - val_accuracy: 0.5203
Epoch 14/50
88/88 [==============================] - 137s 2s/step - loss: 1.1060 - accuracy: 0.6229 - val_loss: 1.0859 - val_accuracy: 0.6648
Epoch 15/50
88/88 [==============================] - 136s 2s/step - loss: 1.0639 - accuracy: 0.6406 - val_loss: 1.1339 - val_accuracy: 0.6536
Epoch 16/50
88/88 [==============================] - 138s 2s/step - loss: 1.0200 - accuracy: 0.6547 - val_loss: 0.9568 - val_accuracy: 0.7013
Epoch 17/50
88/88 [==============================] - 135s 2s/step - loss: 0.9994 - accuracy: 0.6629 - val_loss: 1.0220 - val_accuracy: 0.6858
Epoch 18/50
88/88 [==============================] - 135s 2s/step - loss: 0.9049 - accuracy: 0.6890 - val_loss: 0.8447 - val_accuracy: 0.7588
Epoch 19/50
88/88 [==============================] - 133s 2s/step - loss: 0.8803 - accuracy: 0.7104 - val_loss: 1.1904 - val_accuracy: 0.6045
Epoch 20/50
88/88 [==============================] - 135s 2s/step - loss: 0.8255 - accuracy: 0.7203 - val_loss: 1.0470 - val_accuracy: 0.6494
Epoch 21/50
88/88 [==============================] - 134s 2s/step - loss: 0.7769 - accuracy: 0.7389 - val_loss: 0.8571 - val_accuracy: 0.7630
Epoch 22/50
88/88 [==============================] - 135s 2s/step - loss: 0.7662 - accuracy: 0.7380 - val_loss: 0.7119 - val_accuracy: 0.7728
Epoch 23/50
88/88 [==============================] - 131s 1s/step - loss: 0.7289 - accuracy: 0.7525 - val_loss: 0.6887 - val_accuracy: 0.7910
Epoch 24/50
88/88 [==============================] - 130s 1s/step - loss: 0.6756 - accuracy: 0.7660 - val_loss: 0.6604 - val_accuracy: 0.7896
Epoch 25/50
88/88 [==============================] - 131s 1s/step - loss: 0.6616 - accuracy: 0.7711 - val_loss: 0.8252 - val_accuracy: 0.7251
Epoch 26/50
88/88 [==============================] - 134s 2s/step - loss: 0.6188 - accuracy: 0.7883 - val_loss: 0.6123 - val_accuracy: 0.8177
Epoch 27/50
88/88 [==============================] - 135s 2s/step - loss: 0.6267 - accuracy: 0.7826 - val_loss: 0.6507 - val_accuracy: 0.8065
Epoch 28/50
88/88 [==============================] - 130s 1s/step - loss: 0.5994 - accuracy: 0.8027 - val_loss: 0.5978 - val_accuracy: 0.8247
Epoch 29/50
88/88 [==============================] - 131s 1s/step - loss: 0.5810 - accuracy: 0.8051 - val_loss: 0.7928 - val_accuracy: 0.7153
Epoch 30/50
88/88 [==============================] - 130s 1s/step - loss: 0.5657 - accuracy: 0.8090 - val_loss: 0.5345 - val_accuracy: 0.8303
Epoch 31/50
88/88 [==============================] - 132s 1s/step - loss: 0.5475 - accuracy: 0.8078 - val_loss: 0.5012 - val_accuracy: 0.8583
Epoch 32/50
88/88 [==============================] - 133s 2s/step - loss: 0.5239 - accuracy: 0.8189 - val_loss: 0.5092 - val_accuracy: 0.8401
Epoch 33/50
88/88 [==============================] - 146s 2s/step - loss: 0.5046 - accuracy: 0.8259 - val_loss: 0.4325 - val_accuracy: 0.8527
Epoch 34/50
88/88 [==============================] - 134s 2s/step - loss: 0.4996 - accuracy: 0.8367 - val_loss: 0.5366 - val_accuracy: 0.8401
Epoch 35/50
88/88 [==============================] - 130s 1s/step - loss: 0.4921 - accuracy: 0.8343 - val_loss: 0.4040 - val_accuracy: 0.8682
Epoch 36/50
88/88 [==============================] - 132s 1s/step - loss: 0.5111 - accuracy: 0.8292 - val_loss: 0.4853 - val_accuracy: 0.8331
Epoch 37/50
88/88 [==============================] - 131s 1s/step - loss: 0.4684 - accuracy: 0.8358 - val_loss: 0.4533 - val_accuracy: 0.8583
Epoch 38/50
88/88 [==============================] - 130s 1s/step - loss: 0.4766 - accuracy: 0.8361 - val_loss: 0.4471 - val_accuracy: 0.8682
Epoch 39/50
88/88 [==============================] - 131s 1s/step - loss: 0.4255 - accuracy: 0.8544 - val_loss: 0.5247 - val_accuracy: 0.8261
Epoch 40/50
88/88 [==============================] - 128s 1s/step - loss: 0.4725 - accuracy: 0.8451 - val_loss: 0.4150 - val_accuracy: 0.8612
Epoch 41/50
88/88 [==============================] - 121s 1s/step - loss: 0.4074 - accuracy: 0.8589 - val_loss: 0.4817 - val_accuracy: 0.8303
Epoch 42/50
88/88 [==============================] - 144s 2s/step - loss: 0.4382 - accuracy: 0.8466 - val_loss: 0.4430 - val_accuracy: 0.8471
Epoch 43/50
88/88 [==============================] - 130s 1s/step - loss: 0.4379 - accuracy: 0.8433 - val_loss: 0.5601 - val_accuracy: 0.8149
Epoch 44/50
88/88 [==============================] - 144s 2s/step - loss: 0.4244 - accuracy: 0.8532 - val_loss: 0.5248 - val_accuracy: 0.8289
Epoch 45/50
88/88 [==============================] - 172s 2s/step - loss: 0.3984 - accuracy: 0.8641 - val_loss: 0.3634 - val_accuracy: 0.8836
Epoch 46/50
88/88 [==============================] - 158s 2s/step - loss: 0.3862 - accuracy: 0.8680 - val_loss: 0.5525 - val_accuracy: 0.8275
Epoch 47/50
88/88 [==============================] - 157s 2s/step - loss: 0.3645 - accuracy: 0.8671 - val_loss: 0.3484 - val_accuracy: 0.8878
Epoch 48/50
88/88 [==============================] - 171s 2s/step - loss: 0.3760 - accuracy: 0.8674 - val_loss: 0.4408 - val_accuracy: 0.8555
Epoch 49/50
88/88 [==============================] - 166s 2s/step - loss: 0.3963 - accuracy: 0.8722 - val_loss: 0.3836 - val_accuracy: 0.8836
Epoch 50/50
88/88 [==============================] - 156s 2s/step - loss: 0.3732 - accuracy: 0.8671 - val_loss: 0.3474 - val_accuracy: 0.8766
In [26]:
from sklearn.metrics import confusion_matrix
In [28]:
# PREDICTIONS
y_pred = cnn.predict(X_test)
y_class = np.argmax(y_pred, axis = 1) 
y_check = np.argmax(y_test, axis = 1) 
cmatrix = confusion_matrix(y_check, y_class)
print(cmatrix)
23/23 [==============================] - 4s 184ms/step
[[ 9  0  0  0  0  0 31  0  0  0  0  0]
 [ 0 60  5  0  0  0  0  0  0  0  0  0]
 [ 0  1 27  0  0  0  0  0  2  1  2  1]
 [ 0  0  0 68  0  0  0  0  0  1  2  0]
 [ 4  0  0  0 32  2  3  0  1  0  0  0]
 [ 0  2  2  0  0 75  0  0  0  0  2  0]
 [ 3  0  0  0  0  2 90  0  1  0  0  0]
 [ 0  1  0  1  0  0  0 38  0  0  0  0]
 [ 0  0  0  0  0  0  0  1 69  0  1  0]
 [ 0  1  0  2  0  0  0  0  2 29  0  0]
 [ 0  0  0  0  0  0  0  0  0  0 82  0]
 [ 0  0  0  0  0  2  0  0  3  0  0 51]]
In [61]:
cnn_loss, cnn_accuracy = cnn.evaluate(X_test, y_test, verbose=1)
print('Test loss:', cnn_loss)
print('Test accuracy:', cnn_accuracy)
23/23 [==============================] - 4s 182ms/step - loss: 0.3211 - accuracy: 0.8848
Test loss: 0.3210514187812805
Test accuracy: 0.8848314881324768

3.D.

In [44]:
perm = np.random.choice(len(df))
perm
Out[44]:
4011
In [51]:
random_img_prediction = []
perm = np.random.choice(len(df))
plt.imshow(df[image_col_name][perm])
plt.axis("off")
plt.show()
random_image_path = os.path.join(os.path.join(train_folder_path , df['species'][perm]), df['name'][perm])
random_image_path
Out[51]:
'plant-seedlings-classification/train\\Common Chickweed\\2f60156c7.png'
In [64]:
img_pred = cv2.imread(random_image_path)
img_pred_resize = cv2.resize(img_pred,(IMG_HEIGHT, IMG_WIDTH), interpolation = cv2.INTER_CUBIC)
img_predict_plant = np.expand_dims(img_pred_resize, axis=0)
cnn_predict_image = np.argmax(cnn.predict(img_predict_plant))
print('The predicted plant is with label:', cnn_predict_image)
1/1 [==============================] - 0s 25ms/step
The predicted flower is with label: 3

PART B¶

1.A.

In [256]:
import tflearn
import tflearn.datasets.oxflower17 as oxflower17
In [257]:
import random
random.seed(0)
In [258]:
x, y = oxflower17.load_data()

1.B. & 1.C.

In [259]:
print("shape of x ", x.shape)
shape of x  (1360, 224, 224, 3)

There are a total of 1360 images of size 224, 224, 3

In [260]:
print("shape of y ", y.shape)
shape of y  (1360,)
In [261]:
images_counts = []
labels = []
for label in set(y):
  images_counts.append(len(x[y == label]))
  labels.append(label)
  print("Label : ", label, "-> number of images : ", len(x[y == label]))

fig, ax = plt.subplots(figsize=(20,10))
ax.bar(labels, images_counts)
plt.xlabel("Class")
plt.ylabel("Number of images")
plt.show()
Label :  0 -> number of images :  80
Label :  1 -> number of images :  80
Label :  2 -> number of images :  80
Label :  3 -> number of images :  80
Label :  4 -> number of images :  80
Label :  5 -> number of images :  80
Label :  6 -> number of images :  80
Label :  7 -> number of images :  80
Label :  8 -> number of images :  80
Label :  9 -> number of images :  80
Label :  10 -> number of images :  80
Label :  11 -> number of images :  80
Label :  12 -> number of images :  80
Label :  13 -> number of images :  80
Label :  14 -> number of images :  80
Label :  15 -> number of images :  80
Label :  16 -> number of images :  80

All labels have 80 images each

2.A.

In [262]:
labels
Out[262]:
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
In [ ]:
 
In [263]:
def show_random_image(data, n):
    random_images = np.random.choice(len(x), size=5)

    for i in range(n):
        plt.imshow(x[i])
        plt.axis("off")
        plt.show()
        print("Label : ", y[i], "\n\n")
show_random_image(x, 5)
Label :  8 


Label :  12 


Label :  16 


Label :  11 


Label :  5 


2.B.

In [264]:
# selecting random image
random_image = x[np.random.choice(len(x))].copy()
plt.imshow(random_image, cmap='gray')
plt.axis("off")
Out[264]:
(-0.5, 223.5, 223.5, -0.5)

2.C.

In [265]:
from skimage import color
In [266]:
gray_img = random_image.copy()
for clr in range(random_image.shape[2]):
    gray_img[:,:,clr] = random_image.mean(axis=2)
plt.imshow(gray_img)
plt.axis("off")
Out[266]:
(-0.5, 223.5, 223.5, -0.5)

2.D.

In [267]:
def sharpen_image(image):
    kernel = np.array([ [0, -1, 0],
                        [-1, 5,-1],
                        [0, -1, 0]])
    image_sharp = cv2.filter2D(image, ddepth=-1, kernel=kernel)
    return image_sharp

sharp_img = sharpen_image(random_image)

f, axarr = plt.subplots(1,2, figsize=(15,15))
axarr[0].imshow(random_image)
axarr[0].set_title("Before Sharpening")
axarr[0].axis('off')
axarr[1].imshow(sharp_img)
axarr[1].set_title("After Sharpening")
axarr[1].axis('off')
plt.show()
Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers).

2.E.

In [268]:
def blur_image(image):
    image_blur = cv2.blur(image,(5,5))
    return image_blur

blur_img = blur_image(random_image)

f, axarr = plt.subplots(1,2, figsize=(15,15))
axarr[0].imshow(random_image)
axarr[0].set_title("Before Blur")
axarr[0].axis('off')
axarr[1].imshow(blur_img)
axarr[1].set_title("After Blur")
axarr[1].axis('off')
plt.show()

2.F.

In [269]:
f, axarr = plt.subplots(2,2, figsize=(15,15))
axarr[0,0].imshow(random_image)
axarr[0,0].set_title("Original")
axarr[0,0].axis('off')
axarr[0,1].imshow(gray_img)
axarr[0,1].set_title("Grayscale")
axarr[0,1].axis('off')
axarr[1,0].imshow(sharp_img)
axarr[1,0].set_title("Sharp")
axarr[1,0].axis('off')
axarr[1,1].imshow(blur_img)
axarr[1,1].set_title("Blur")
axarr[1,1].axis('off')
plt.show()
Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers).

3.A.

In [307]:
X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0, shuffle=True)
In [308]:
print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)
(1088, 224, 224, 3) (272, 224, 224, 3) (1088,) (272,)
In [309]:
random_seed = 2
X_test, X_val, y_test, y_val = train_test_split(X_test, y_test, test_size = 0.5, random_state=random_seed)
In [310]:
print(X_test.shape, X_val.shape, y_test.shape, y_val.shape)
(136, 224, 224, 3) (136, 224, 224, 3) (136,) (136,)

3.B.

Supervised Learning algorithm

In [312]:
def flatten(X):
  Xr, Xg, Xb = [],[],[]
  for idx, samples in enumerate(X):
    try:
        r, g, b = cv2.split(samples)
    except ValueError:
        continue
    Xr.append(r.flatten())
    Xg.append(g.flatten())
    Xb.append(b.flatten())
  Xr = np.array(Xr)
  Xg = np.array(Xg)
  Xb = np.array(Xb)
  return (Xr, Xg, Xb)
In [317]:
X_train_r, X_train_g, X_train_b = flatten(X_train)
X_test_r, X_test_g, X_test_b = flatten(X_test)
X_val_r, X_val_g, X_val_b = flatten(X_val)
In [318]:
X_train_r.shape, X_train_g.shape, X_train_b.shape,
X_test_r.shape, X_test_g.shape, X_test_b.shape,
X_val_r.shape, X_val_g.shape, X_val_b.shape
Out[318]:
((136, 50176), (136, 50176), (136, 50176))
In [320]:
n_components = 100
X_train_pca_r = RandomizedPCA(n_components=n_components, whiten=True).fit(X_train_r)
X_train_pca_g = RandomizedPCA(n_components=n_components, whiten=True).fit(X_train_g)
X_train_pca_b = RandomizedPCA(n_components=n_components, whiten=True).fit(X_train_b)

X_test_pca_r = RandomizedPCA(n_components=n_components, whiten=True).fit(X_test_r)
X_test_pca_g = RandomizedPCA(n_components=n_components, whiten=True).fit(X_test_g)
X_test_pca_b = RandomizedPCA(n_components=n_components, whiten=True).fit(X_test_b)

X_val_pca_r = RandomizedPCA(n_components=n_components, whiten=True).fit(X_val_r)
X_val_pca_g = RandomizedPCA(n_components=n_components, whiten=True).fit(X_val_g)
X_val_pca_b = RandomizedPCA(n_components=n_components, whiten=True).fit(X_val_b)
In [321]:
Xr_train_pca = X_train_pca_r.transform(X_train_r)
Xg_train_pca = X_train_pca_g.transform(X_train_g)
Xb_train_pca = X_train_pca_b.transform(X_train_b)

Xr_test_pca = X_test_pca_r.transform(X_test_r)
Xg_test_pca = X_test_pca_g.transform(X_test_g)
Xb_test_pca = X_test_pca_b.transform(X_test_b)

Xr_val_pca = X_val_pca_r.transform(X_val_r)
Xg_val_pca = X_val_pca_g.transform(X_val_g)
Xb_val_pca = X_val_pca_b.transform(X_val_b)
In [322]:
X_train_pca_r.explained_variance_ratio_.sum(), X_train_pca_g.explained_variance_ratio_.sum(), X_train_pca_b.explained_variance_ratio_.sum(),
X_test_pca_r.explained_variance_ratio_.sum(), X_test_pca_g.explained_variance_ratio_.sum(), X_test_pca_b.explained_variance_ratio_.sum(),
X_val_pca_r.explained_variance_ratio_.sum(), X_val_pca_g.explained_variance_ratio_.sum(), X_val_pca_b.explained_variance_ratio_.sum()
Out[322]:
(0.95320654, 0.95023745, 0.96793455)
In [323]:
X_train_pca = np.concatenate([Xr_train_pca,Xg_train_pca,Xb_train_pca], axis=1)
X_test_pca = np.concatenate([Xr_test_pca,Xg_test_pca,Xb_test_pca], axis=1)
X_val_pca = np.concatenate([Xr_val_pca,Xg_val_pca,Xb_val_pca], axis=1)
In [324]:
X_train_pca.shape, y_train.shape,
X_test_pca.shape, y_test.shape,
X_val_pca.shape, y_val.shape
Out[324]:
((136, 300), (136,))
In [326]:
param_grid = [
  {'C': [1, 10, 100, 1000], 'kernel': ['linear']},
  {'C': [1, 10, 100, 1000], 'gamma': [0.001, 0.0001], 'kernel': ['rbf']},
 ]
svc = SVC()
clf = GridSearchCV(svc, param_grid, verbose=10, n_jobs=-1)
clf.fit(X_train_pca, y_train)
Fitting 5 folds for each of 12 candidates, totalling 60 fits
Out[326]:
GridSearchCV(estimator=SVC(), n_jobs=-1,
             param_grid=[{'C': [1, 10, 100, 1000], 'kernel': ['linear']},
                         {'C': [1, 10, 100, 1000], 'gamma': [0.001, 0.0001],
                          'kernel': ['rbf']}],
             verbose=10)
In [331]:
y_pred = clf.predict(X_test_pca)
svm_accuracy = metrics.accuracy_score(y_test, y_pred)
print("SVM accuracy : ", svm_accuracy)
SVM accuracy :  0.09558823529411764

Prediction with SVM

In [340]:
img_pred_flower = cv2.imread(os.path.join(folderPath, 'Prediction.jpg'))
img_pred_resize = cv2.resize(img_pred_flower, (224, 224), interpolation = cv2.INTER_CUBIC)
plt.imshow(img_pred_resize)
plt.axis('off')
img_prediction = np.expand_dims(img_pred_resize, axis=0)
In [349]:
pred_img = np.squeeze(img_prediction, axis=0)
X_pred_r, X_pred_g, X_pred_b = cv2.split(pred_img)
In [350]:
print(f"The shape of image to be predicted is:'{img_pred_flower.shape}'")
print(f"The shape of image to be predicted after expanding the dimensions is:'{img_prediction.shape}'")
The shape of image to be predicted is:'(224, 224, 3)'
The shape of image to be predicted after expanding the dimensions is:'(1, 224, 224, 3)'
In [351]:
X_pred_pca_r = X_train_pca_r.transform(np.expand_dims(X_pred_r.flatten(), axis=0))
X_pred_pca_g = X_train_pca_g.transform(np.expand_dims(X_pred_g.flatten(), axis=0))
X_pred_pca_b = X_train_pca_b.transform(np.expand_dims(X_pred_b.flatten(), axis=0))

X_pred_pca = np.concatenate([X_pred_pca_r,X_pred_pca_g,X_pred_pca_b], axis=1)
In [352]:
pred_svm = clf.predict(X_pred_pca)[0]
print(f"The predicted label is:'{pred_svm}'")
The predicted label is:'1'
In [353]:
model_performance_b = pd.DataFrame(columns=['Model', 'Accuracy', 'Loss','Predicted Class'])
In [354]:
model_performance_b = model_performance_b.append({'Model':'SVM',
                                              'Accuracy': svm_accuracy,
                                              'Loss': 'NA',
                                              'Predicted Class': pred_svm                                      
                                              }, ignore_index=True)

model_performance_b
Out[354]:
Model Accuracy Loss Predicted Class
0 SVM 0.095588 NA 1

3.C.

In [381]:
num_classes = len(np.unique(y))
num_classes
Out[381]:
17
In [382]:
y_train = tf.keras.utils.to_categorical(y_train, num_classes=num_classes)
y_test = tf.keras.utils.to_categorical(y_test, num_classes=num_classes)
y_val = tf.keras.utils.to_categorical(y_val, num_classes=num_classes)
In [398]:
enc = LabelBinarizer()
y2 = enc.fit_transform(y)
In [401]:
X_train, X_test, y_train, y_test = train_test_split(x, y2, test_size=0.2, random_state=50)
In [402]:
print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)
(1088, 224, 224, 3) (272, 224, 224, 3) (1088, 17) (272, 17)
In [403]:
X_test, X_val, y_test, y_val = train_test_split(X_test, y_test, test_size = 0.5, random_state=random_seed)
In [404]:
print(X_test.shape, X_val.shape, y_test.shape, y_val.shape)
(136, 224, 224, 3) (136, 224, 224, 3) (136, 17) (136, 17)
In [405]:
def nn_model(height, width, num_channels, num_classes, loss='categorical_crossentropy', metrics=['accuracy']):
  model = Sequential()

  model.add(InputLayer(input_shape=(height, width, num_channels)))

  model.add(Flatten())
  model.add(BatchNormalization())
  model.add(Dense(1024, activation='relu'))
  model.add(Dropout(0.2))

  model.add(BatchNormalization())
  model.add(Dense(512, activation='relu'))
  model.add(Dropout(0.2))

  model.add(BatchNormalization())
  model.add(Dense(256, activation='relu'))
  model.add(Dropout(0.2))

  model.add(BatchNormalization())
  model.add(Dense(num_classes, activation = "softmax"))
  opt = tf.keras.optimizers.Adam(lr=0.000001)
  model.compile(optimizer = opt, loss = loss, metrics = metrics)

  model.summary()
  return model
In [406]:
nn = nn_model(224, 224, 3, 17)
Model: "sequential_4"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 flatten_3 (Flatten)         (None, 150528)            0         
                                                                 
 batch_normalization_12 (Bat  (None, 150528)           602112    
 chNormalization)                                                
                                                                 
 dense_14 (Dense)            (None, 1024)              154141696 
                                                                 
 dropout_13 (Dropout)        (None, 1024)              0         
                                                                 
 batch_normalization_13 (Bat  (None, 1024)             4096      
 chNormalization)                                                
                                                                 
 dense_15 (Dense)            (None, 512)               524800    
                                                                 
 dropout_14 (Dropout)        (None, 512)               0         
                                                                 
 batch_normalization_14 (Bat  (None, 512)              2048      
 chNormalization)                                                
                                                                 
 dense_16 (Dense)            (None, 256)               131328    
                                                                 
 dropout_15 (Dropout)        (None, 256)               0         
                                                                 
 batch_normalization_15 (Bat  (None, 256)              1024      
 chNormalization)                                                
                                                                 
 dense_17 (Dense)            (None, 17)                4369      
                                                                 
=================================================================
Total params: 155,411,473
Trainable params: 155,106,833
Non-trainable params: 304,640
_________________________________________________________________
In [407]:
nn_history = nn.fit(X_train, 
                  y_train, 
                  epochs = 50, 
                  validation_data = (X_val, y_val),
                  batch_size = batch_size)
Train on 1088 samples, validate on 136 samples
Epoch 1/50
1088/1088 [==============================] - 21s 19ms/sample - loss: 3.4790 - acc: 0.0754 - val_loss: 2.7836 - val_acc: 0.0662
Epoch 2/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 2.9554 - acc: 0.1397 - val_loss: 2.7036 - val_acc: 0.1544
Epoch 3/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 2.7146 - acc: 0.2031 - val_loss: 2.6353 - val_acc: 0.1912
Epoch 4/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 2.4631 - acc: 0.2509 - val_loss: 2.5718 - val_acc: 0.2426
Epoch 5/50
1088/1088 [==============================] - 18s 17ms/sample - loss: 2.2798 - acc: 0.3125 - val_loss: 2.5009 - val_acc: 0.2647
Epoch 6/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 2.1463 - acc: 0.3382 - val_loss: 2.4271 - val_acc: 0.2794
Epoch 7/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 1.9715 - acc: 0.3961 - val_loss: 2.3531 - val_acc: 0.2868
Epoch 8/50
1088/1088 [==============================] - 18s 17ms/sample - loss: 1.9288 - acc: 0.3998 - val_loss: 2.2814 - val_acc: 0.3088
Epoch 9/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 1.7662 - acc: 0.4449 - val_loss: 2.2219 - val_acc: 0.3456
Epoch 10/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 1.6806 - acc: 0.4540 - val_loss: 2.1784 - val_acc: 0.3382
Epoch 11/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 1.5769 - acc: 0.5064 - val_loss: 2.1254 - val_acc: 0.3456
Epoch 12/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 1.4902 - acc: 0.5404 - val_loss: 2.0843 - val_acc: 0.3456
Epoch 13/50
1088/1088 [==============================] - 18s 17ms/sample - loss: 1.4199 - acc: 0.5717 - val_loss: 2.0684 - val_acc: 0.3676
Epoch 14/50
1088/1088 [==============================] - 18s 17ms/sample - loss: 1.3446 - acc: 0.5836 - val_loss: 2.0448 - val_acc: 0.3676
Epoch 15/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 1.2901 - acc: 0.5928 - val_loss: 2.0286 - val_acc: 0.3603
Epoch 16/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 1.2371 - acc: 0.6452 - val_loss: 2.0221 - val_acc: 0.3676
Epoch 17/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 1.1717 - acc: 0.6434 - val_loss: 1.9945 - val_acc: 0.4044
Epoch 18/50
1088/1088 [==============================] - 19s 17ms/sample - loss: 1.1315 - acc: 0.6774 - val_loss: 1.9832 - val_acc: 0.4191
Epoch 19/50
1088/1088 [==============================] - 18s 17ms/sample - loss: 1.0700 - acc: 0.6912 - val_loss: 2.0001 - val_acc: 0.3897
Epoch 20/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 0.9988 - acc: 0.7289 - val_loss: 2.0016 - val_acc: 0.3971
Epoch 21/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 1.0237 - acc: 0.7031 - val_loss: 1.9815 - val_acc: 0.4118
Epoch 22/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.9606 - acc: 0.7316 - val_loss: 1.9824 - val_acc: 0.4044
Epoch 23/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.9270 - acc: 0.7537 - val_loss: 1.9704 - val_acc: 0.4191
Epoch 24/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.9367 - acc: 0.7546 - val_loss: 1.9764 - val_acc: 0.3971
Epoch 25/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.8998 - acc: 0.7528 - val_loss: 1.9662 - val_acc: 0.3824
Epoch 26/50
1088/1088 [==============================] - 17s 15ms/sample - loss: 0.8095 - acc: 0.7868 - val_loss: 1.9696 - val_acc: 0.3824
Epoch 27/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.7614 - acc: 0.8208 - val_loss: 1.9545 - val_acc: 0.3824
Epoch 28/50
1088/1088 [==============================] - 18s 17ms/sample - loss: 0.7420 - acc: 0.8226 - val_loss: 1.9388 - val_acc: 0.3676
Epoch 29/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 0.7390 - acc: 0.8290 - val_loss: 1.9309 - val_acc: 0.4118
Epoch 30/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.7173 - acc: 0.8346 - val_loss: 1.9221 - val_acc: 0.4118
Epoch 31/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 0.6629 - acc: 0.8575 - val_loss: 1.9177 - val_acc: 0.3971
Epoch 32/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 0.6924 - acc: 0.8465 - val_loss: 1.9187 - val_acc: 0.4044
Epoch 33/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.6483 - acc: 0.8594 - val_loss: 1.9196 - val_acc: 0.4118
Epoch 34/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.6089 - acc: 0.8585 - val_loss: 1.9395 - val_acc: 0.4044
Epoch 35/50
1088/1088 [==============================] - 18s 17ms/sample - loss: 0.6310 - acc: 0.8566 - val_loss: 1.9327 - val_acc: 0.3971
Epoch 36/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 0.5830 - acc: 0.8805 - val_loss: 1.9306 - val_acc: 0.4338
Epoch 37/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.5514 - acc: 0.8925 - val_loss: 1.9464 - val_acc: 0.4412
Epoch 38/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.5652 - acc: 0.8897 - val_loss: 1.9553 - val_acc: 0.4191
Epoch 39/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.5084 - acc: 0.9044 - val_loss: 1.9541 - val_acc: 0.4191
Epoch 40/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 0.5008 - acc: 0.9127 - val_loss: 1.9472 - val_acc: 0.4191
Epoch 41/50
1088/1088 [==============================] - 17s 15ms/sample - loss: 0.5139 - acc: 0.9081 - val_loss: 1.9367 - val_acc: 0.3971
Epoch 42/50
1088/1088 [==============================] - 18s 17ms/sample - loss: 0.4803 - acc: 0.8989 - val_loss: 1.9446 - val_acc: 0.4191
Epoch 43/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 0.4847 - acc: 0.9072 - val_loss: 1.9314 - val_acc: 0.4044
Epoch 44/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.4627 - acc: 0.9136 - val_loss: 1.9247 - val_acc: 0.4118
Epoch 45/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.4376 - acc: 0.9210 - val_loss: 1.9181 - val_acc: 0.4044
Epoch 46/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.4114 - acc: 0.9403 - val_loss: 1.9134 - val_acc: 0.4118
Epoch 47/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.4313 - acc: 0.9366 - val_loss: 1.9151 - val_acc: 0.4191
Epoch 48/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.4016 - acc: 0.9384 - val_loss: 1.9168 - val_acc: 0.4118
Epoch 49/50
1088/1088 [==============================] - 17s 16ms/sample - loss: 0.3970 - acc: 0.9430 - val_loss: 1.9174 - val_acc: 0.4044
Epoch 50/50
1088/1088 [==============================] - 18s 16ms/sample - loss: 0.3844 - acc: 0.9494 - val_loss: 1.9110 - val_acc: 0.4118
In [408]:
nn_loss, nn_accuracy = nn.evaluate(X_test, y_test, verbose=1)
print('Test loss:', nn_loss)
print('Test accuracy:', nn_accuracy)
Test loss: 1.6525085533366484
Test accuracy: 0.4779412
In [411]:
fig, ax = plt.subplots(2,1 , figsize=(20,10))
ax[0].plot(nn_history.history['loss'], color='b', label="Training loss")
ax[0].plot(nn_history.history['val_loss'], color='r', label="validation loss",axes =ax[0])
legend = ax[0].legend(loc='best', shadow=True)

ax[1].plot(nn_history.history['acc'], color='b', label="Training accuracy")
ax[1].plot(nn_history.history['val_acc'], color='r',label="Validation accuracy")
legend = ax[1].legend(loc='best', shadow=True)
In [412]:
nn_predict = np.argmax(nn.predict(img_prediction))
print('The predicted flower is with label:', nn_predict)
The predicted flower is with label: 3
In [413]:
model_performance_b = model_performance_b.append({'Model':'NN',
                                              'Accuracy': nn_accuracy,
                                              'Loss': nn_loss,
                                              'Predicted Class': nn_predict                                      
                                              }, ignore_index=True)

model_performance_b
Out[413]:
Model Accuracy Loss Predicted Class
0 SVM 0.095588 NA 1
1 NN 0.477941 1.652509 3

3.D.

In [456]:
y_train = tf.keras.utils.to_categorical(y_train, num_classes=num_classes)
y_test = tf.keras.utils.to_categorical(y_test, num_classes=num_classes)
y_val = tf.keras.utils.to_categorical(y_val, num_classes=num_classes)
In [465]:
X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=50)
In [466]:
X_test, X_val, y_test, y_val = train_test_split(X_test, y_test, test_size = 0.5, random_state=random_seed)
In [467]:
def basic_cnn_model(height, width, num_channels, num_classes, loss='sparse_categorical_crossentropy', metrics=['accuracy']):
  batch_size = None

  model = Sequential()

  model.add(Conv2D(filters = 32, kernel_size = (5,5),padding = 'Same', 
                  activation ='relu', batch_input_shape = (batch_size,height, width, num_channels)))


  model.add(Conv2D(filters = 32, kernel_size = (5,5),padding = 'Same', 
                  activation ='relu'))
  model.add(MaxPool2D(pool_size=(2,2)))
  model.add(Dropout(0.2))


  model.add(Conv2D(filters = 64, kernel_size = (3,3),padding = 'Same', 
                  activation ='relu'))
  model.add(Conv2D(filters = 64, kernel_size = (3,3),padding = 'same', 
                  activation ='relu'))
  model.add(MaxPool2D(pool_size=(2,2), strides=(2,2)))
  model.add(Dropout(0.3))

  model.add(Conv2D(filters = 128, kernel_size = (3,3),padding = 'Same', 
                  activation ='relu'))
  model.add(Conv2D(filters = 128, kernel_size = (3,3),padding = 'Same', 
                  activation ='relu'))
  model.add(MaxPool2D(pool_size=(2,2), strides=(2,2)))
  model.add(Dropout(0.4))



  model.add(GlobalMaxPooling2D())
  model.add(Dense(256, activation = "relu"))
  model.add(Dropout(0.5))
  model.add(Dense(num_classes, activation = "softmax"))

  optimizer = RMSprop(lr=0.001, rho=0.9, epsilon=1e-08, decay=0.0)
  model.compile(optimizer = optimizer, loss = loss, metrics = metrics)
  model.summary()
  return model
In [468]:
basic_cnn = basic_cnn_model(224,224,3,17)
Model: "sequential_13"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d_30 (Conv2D)          (None, 224, 224, 32)      2432      
                                                                 
 conv2d_31 (Conv2D)          (None, 224, 224, 32)      25632     
                                                                 
 max_pooling2d_17 (MaxPoolin  (None, 112, 112, 32)     0         
 g2D)                                                            
                                                                 
 dropout_33 (Dropout)        (None, 112, 112, 32)      0         
                                                                 
 conv2d_32 (Conv2D)          (None, 112, 112, 64)      18496     
                                                                 
 conv2d_33 (Conv2D)          (None, 112, 112, 64)      36928     
                                                                 
 max_pooling2d_18 (MaxPoolin  (None, 56, 56, 64)       0         
 g2D)                                                            
                                                                 
 dropout_34 (Dropout)        (None, 56, 56, 64)        0         
                                                                 
 conv2d_34 (Conv2D)          (None, 56, 56, 128)       73856     
                                                                 
 conv2d_35 (Conv2D)          (None, 56, 56, 128)       147584    
                                                                 
 max_pooling2d_19 (MaxPoolin  (None, 28, 28, 128)      0         
 g2D)                                                            
                                                                 
 dropout_35 (Dropout)        (None, 28, 28, 128)       0         
                                                                 
 global_max_pooling2d_4 (Glo  (None, 128)              0         
 balMaxPooling2D)                                                
                                                                 
 dense_34 (Dense)            (None, 256)               33024     
                                                                 
 dropout_36 (Dropout)        (None, 256)               0         
                                                                 
 dense_35 (Dense)            (None, 17)                4369      
                                                                 
=================================================================
Total params: 342,321
Trainable params: 342,321
Non-trainable params: 0
_________________________________________________________________
In [469]:
basic_cnn_history = basic_cnn.fit(X_train, 
                      y_train, 
                      epochs = 50, 
                      validation_data = (X_val, y_val),
                      batch_size = batch_size)
Train on 1088 samples, validate on 136 samples
Epoch 1/50
1088/1088 [==============================] - 155s 143ms/sample - loss: 2.8775 - acc: 0.0625 - val_loss: 2.8201 - val_acc: 0.0662
Epoch 2/50
1088/1088 [==============================] - 161s 148ms/sample - loss: 2.7650 - acc: 0.0882 - val_loss: 2.7414 - val_acc: 0.1176
Epoch 3/50
1088/1088 [==============================] - 174s 160ms/sample - loss: 2.6874 - acc: 0.1140 - val_loss: 2.6115 - val_acc: 0.1176
Epoch 4/50
1088/1088 [==============================] - 175s 161ms/sample - loss: 2.5300 - acc: 0.1397 - val_loss: 2.5075 - val_acc: 0.1544
Epoch 5/50
1088/1088 [==============================] - 175s 161ms/sample - loss: 2.4121 - acc: 0.1691 - val_loss: 2.5883 - val_acc: 0.2353
Epoch 6/50
1088/1088 [==============================] - 176s 162ms/sample - loss: 2.3697 - acc: 0.1829 - val_loss: 2.2101 - val_acc: 0.2574
Epoch 7/50
1088/1088 [==============================] - 174s 160ms/sample - loss: 2.2689 - acc: 0.2114 - val_loss: 2.1247 - val_acc: 0.2721
Epoch 8/50
1088/1088 [==============================] - 174s 160ms/sample - loss: 2.2241 - acc: 0.2142 - val_loss: 2.4633 - val_acc: 0.1765
Epoch 9/50
1088/1088 [==============================] - 175s 161ms/sample - loss: 2.1503 - acc: 0.2298 - val_loss: 2.1975 - val_acc: 0.2868
Epoch 10/50
1088/1088 [==============================] - 174s 160ms/sample - loss: 2.1491 - acc: 0.2454 - val_loss: 2.1054 - val_acc: 0.3162
Epoch 11/50
1088/1088 [==============================] - 175s 161ms/sample - loss: 2.0577 - acc: 0.2840 - val_loss: 2.1197 - val_acc: 0.2721
Epoch 12/50
1088/1088 [==============================] - 173s 159ms/sample - loss: 1.9978 - acc: 0.3015 - val_loss: 1.9267 - val_acc: 0.3309
Epoch 13/50
1088/1088 [==============================] - 162s 149ms/sample - loss: 1.9475 - acc: 0.3208 - val_loss: 1.9783 - val_acc: 0.3235
Epoch 14/50
1088/1088 [==============================] - 160s 147ms/sample - loss: 1.8781 - acc: 0.3539 - val_loss: 2.0327 - val_acc: 0.3456
Epoch 15/50
1088/1088 [==============================] - 160s 147ms/sample - loss: 1.8313 - acc: 0.3722 - val_loss: 1.9062 - val_acc: 0.3676
Epoch 16/50
1088/1088 [==============================] - 161s 148ms/sample - loss: 1.7176 - acc: 0.4329 - val_loss: 1.9328 - val_acc: 0.3456
Epoch 17/50
1088/1088 [==============================] - 175s 160ms/sample - loss: 1.6785 - acc: 0.4412 - val_loss: 1.7975 - val_acc: 0.4191
Epoch 18/50
1088/1088 [==============================] - 175s 161ms/sample - loss: 1.6258 - acc: 0.4596 - val_loss: 1.7828 - val_acc: 0.4632
Epoch 19/50
1088/1088 [==============================] - 173s 159ms/sample - loss: 1.4950 - acc: 0.5028 - val_loss: 1.6579 - val_acc: 0.3750
Epoch 20/50
1088/1088 [==============================] - 174s 160ms/sample - loss: 1.5081 - acc: 0.5110 - val_loss: 1.5128 - val_acc: 0.5515
Epoch 21/50
1088/1088 [==============================] - 174s 160ms/sample - loss: 1.4537 - acc: 0.5028 - val_loss: 1.8093 - val_acc: 0.4338
Epoch 22/50
1088/1088 [==============================] - 174s 160ms/sample - loss: 1.3966 - acc: 0.5221 - val_loss: 1.5237 - val_acc: 0.5882
Epoch 23/50
1088/1088 [==============================] - 173s 159ms/sample - loss: 1.3637 - acc: 0.5469 - val_loss: 1.3699 - val_acc: 0.6103
Epoch 24/50
1088/1088 [==============================] - 170s 157ms/sample - loss: 1.3190 - acc: 0.5726 - val_loss: 1.6541 - val_acc: 0.4559
Epoch 25/50
1088/1088 [==============================] - 159s 146ms/sample - loss: 1.2209 - acc: 0.5928 - val_loss: 1.3911 - val_acc: 0.5662
Epoch 26/50
1088/1088 [==============================] - 159s 146ms/sample - loss: 1.2021 - acc: 0.5855 - val_loss: 1.3077 - val_acc: 0.6324
Epoch 27/50
1088/1088 [==============================] - 160s 147ms/sample - loss: 1.1763 - acc: 0.5974 - val_loss: 1.4428 - val_acc: 0.6103
Epoch 28/50
1088/1088 [==============================] - 160s 147ms/sample - loss: 1.1639 - acc: 0.6121 - val_loss: 1.3605 - val_acc: 0.5588
Epoch 29/50
1088/1088 [==============================] - 191s 176ms/sample - loss: 1.1061 - acc: 0.6526 - val_loss: 1.3622 - val_acc: 0.5809
Epoch 30/50
1088/1088 [==============================] - 178s 164ms/sample - loss: 1.0643 - acc: 0.6507 - val_loss: 1.3195 - val_acc: 0.6397
Epoch 31/50
1088/1088 [==============================] - 179s 165ms/sample - loss: 1.0432 - acc: 0.6489 - val_loss: 1.2208 - val_acc: 0.6397
Epoch 32/50
1088/1088 [==============================] - 178s 164ms/sample - loss: 0.9832 - acc: 0.6756 - val_loss: 1.3806 - val_acc: 0.5515
Epoch 33/50
1088/1088 [==============================] - 175s 161ms/sample - loss: 1.0320 - acc: 0.6581 - val_loss: 1.2401 - val_acc: 0.6029
Epoch 34/50
1088/1088 [==============================] - 177s 162ms/sample - loss: 0.9826 - acc: 0.6792 - val_loss: 1.1128 - val_acc: 0.6838
Epoch 35/50
1088/1088 [==============================] - 177s 163ms/sample - loss: 0.9362 - acc: 0.6774 - val_loss: 1.4517 - val_acc: 0.4632
Epoch 36/50
1088/1088 [==============================] - 177s 163ms/sample - loss: 0.8822 - acc: 0.7096 - val_loss: 1.4101 - val_acc: 0.5147
Epoch 37/50
1088/1088 [==============================] - 178s 164ms/sample - loss: 0.9156 - acc: 0.7022 - val_loss: 1.6059 - val_acc: 0.4853
Epoch 38/50
1088/1088 [==============================] - 177s 163ms/sample - loss: 0.8610 - acc: 0.7105 - val_loss: 1.3022 - val_acc: 0.6103
Epoch 39/50
1088/1088 [==============================] - 180s 165ms/sample - loss: 0.8342 - acc: 0.7206 - val_loss: 1.0377 - val_acc: 0.7279
Epoch 40/50
1088/1088 [==============================] - 173s 159ms/sample - loss: 0.7790 - acc: 0.7215 - val_loss: 1.0144 - val_acc: 0.7132
Epoch 41/50
1088/1088 [==============================] - 173s 159ms/sample - loss: 0.8037 - acc: 0.7123 - val_loss: 1.2993 - val_acc: 0.6103
Epoch 42/50
1088/1088 [==============================] - 181s 166ms/sample - loss: 0.7382 - acc: 0.7445 - val_loss: 1.2273 - val_acc: 0.5956
Epoch 43/50
1088/1088 [==============================] - 172s 158ms/sample - loss: 0.7573 - acc: 0.7417 - val_loss: 1.0361 - val_acc: 0.7059
Epoch 44/50
1088/1088 [==============================] - 191s 176ms/sample - loss: 0.7168 - acc: 0.7463 - val_loss: 1.1499 - val_acc: 0.6471
Epoch 45/50
1088/1088 [==============================] - 217s 200ms/sample - loss: 0.7049 - acc: 0.7702 - val_loss: 1.0506 - val_acc: 0.6618
Epoch 46/50
1088/1088 [==============================] - 193s 177ms/sample - loss: 0.7014 - acc: 0.7675 - val_loss: 1.1337 - val_acc: 0.6838
Epoch 47/50
1088/1088 [==============================] - 182s 168ms/sample - loss: 0.6639 - acc: 0.7767 - val_loss: 1.1797 - val_acc: 0.6691
Epoch 48/50
1088/1088 [==============================] - 201s 185ms/sample - loss: 0.6589 - acc: 0.7739 - val_loss: 1.0579 - val_acc: 0.6544
Epoch 49/50
1088/1088 [==============================] - 183s 168ms/sample - loss: 0.6582 - acc: 0.7831 - val_loss: 1.1378 - val_acc: 0.6029
Epoch 50/50
1088/1088 [==============================] - 190s 174ms/sample - loss: 0.6063 - acc: 0.7858 - val_loss: 1.0416 - val_acc: 0.7059
In [470]:
basic_cnn_loss, basic_cnn_accuracy = basic_cnn.evaluate(X_test, y_test, verbose=1)
print('Test loss:', basic_cnn_loss)
print('Test accuracy:', basic_cnn_accuracy)
Test loss: 0.7410718623329612
Test accuracy: 0.7647059

1.E.

In [471]:
basic_cnn_predict = np.argmax(basic_cnn.predict(img_prediction))
print('The predicted flower is with label:', basic_cnn_predict)
The predicted flower is with label: 2
In [472]:
model_performance_b = model_performance_b.append({'Model':'CNN',
                                              'Accuracy': basic_cnn_accuracy,
                                              'Loss': basic_cnn_loss,
                                              'Predicted Class': basic_cnn_predict                                      
                                              }, ignore_index=True)
In [473]:
model_performance_b
Out[473]:
Model Accuracy Loss Predicted Class
0 SVM 0.095588 NA 1
1 NN 0.477941 1.652509 3
2 CNN 0.764706 0.741072 2
  • Different model have pridected differently.
  • CNN has the highest accuracy